Импорт библиотек¶

In [1]:
import warnings
warnings.filterwarnings('ignore')

import os
import io
import json
import shutil
import zipfile
import requests

import numpy as np
import pandas as pd
import seaborn as sns
import tensorflow as tf
from tqdm.auto import trange
import matplotlib.pyplot as plt

from scipy.signal import welch
from scipy.stats import kurtosis, skew
from scipy.signal.windows import blackman

from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.preprocessing import PowerTransformer, StandardScaler, MinMaxScaler
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score, classification_report, confusion_matrix, ConfusionMatrixDisplay, roc_curve, auc

from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB
from sklearn.tree import DecisionTreeClassifier
from sklearn.neural_network import MLPClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.linear_model import LogisticRegression

from xgboost import XGBClassifier
from lightgbm import LGBMClassifier
from catboost import CatBoostClassifier
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier, GradientBoostingClassifier, HistGradientBoostingClassifier, StackingClassifier, VotingClassifier

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.utils import to_categorical, plot_model

Загрузка и первичная обработка данных¶

Загрузка данных с GitHub¶

In [2]:
# Download the UAV fault dataset archive from GitHub and unpack it locally.
url = "https://github.com/tiiuae/UAV-Realistic-Fault-Dataset/archive/refs/heads/main.zip"
response = requests.get(url, timeout=300)  # fail fast instead of hanging forever on a stalled connection
response.raise_for_status()                # abort with a clear error on HTTP failures (404, 500, ...)

# Extract the archive straight from memory (no temporary file); the context
# manager guarantees the ZipFile handle is closed.
with zipfile.ZipFile(io.BytesIO(response.content)) as zip_file:
    zip_file.extractall("UAV-Realistic-Fault-Dataset")

Парсинг данных¶

In [3]:
# Parse the raw JSONL sensor logs into one CSV per flight plus one combined
# CSV per fault class (folders "0".."4").
# os.path.join instead of a raw backslash literal: the original path only
# worked on Windows.
base_dir = os.path.join('UAV-Realistic-Fault-Dataset',
                        'UAV-Realistic-Fault-Dataset-main', 'Dataset')
processed_dir = 'ProcessedDataset'
os.makedirs(processed_dir, exist_ok=True)

for folder in trange(5, desc='Progress', colour='blue'):
    folder_path = os.path.join(base_dir, str(folder))
    file_number = 0

    class_folder = os.path.join(processed_dir, str(folder))
    os.makedirs(class_folder, exist_ok=True)

    for subfolder in os.listdir(folder_path):
        subfolder_path = os.path.join(folder_path, subfolder)
        if not os.path.isdir(subfolder_path):
            continue

        for file in os.listdir(subfolder_path):
            if not file.endswith('SensorCombined.jsonl'):
                continue

            # One record per JSONL line. Records are collected per FILE:
            # the original accumulated them across files of a subfolder,
            # which duplicated rows whenever more than one file matched.
            with open(os.path.join(subfolder_path, file), 'r') as jsonl_file:
                records = [json.loads(line) for line in jsonl_file]

            df = pd.DataFrame(records)

            # Unpack the 3-element vector columns in one vectorized step
            # instead of a per-row df.loc assignment (much faster).
            df[['gx', 'gy', 'gz']] = pd.DataFrame(df['gyro_rad'].tolist(), index=df.index)
            df[['ax', 'ay', 'az']] = pd.DataFrame(df['accelerometer_m_s2'].tolist(), index=df.index)

            df.drop(columns=["gyro_rad", "accelerometer_m_s2",
                             "accelerometer_timestamp_relative",
                             "accelerometer_clipping"], inplace=True)

            df.rename(columns={'timestamp': 'time',
                               'gyro_integral_dt': 'gInt',
                               'accelerometer_integral_dt': 'aInt'}, inplace=True)

            df.to_csv(os.path.join(class_folder, f"class_{folder}_number_{file_number}.csv"), index=False)
            file_number += 1

    # Concatenate all per-flight CSVs of this class into a single file.
    # Skip a previously written data_N.csv so re-running the cell does not
    # fold the combined file back into itself.
    combined_class_data = []
    for file in os.listdir(class_folder):
        if file.endswith('.csv') and not file.startswith('data_'):
            combined_class_data.append(pd.read_csv(os.path.join(class_folder, file)))

    if combined_class_data:
        final_df = pd.concat(combined_class_data, ignore_index=True)
        final_df.to_csv(os.path.join(class_folder, f"data_{folder}.csv"), index=False)
Progress:   0%|          | 0/5 [00:00<?, ?it/s]
In [4]:
# Убедимся, что данные корректно записались
test_data = pd.read_csv(r"ProcessedDataset\0\data_0.csv")
test_data.head(5)
Out[4]:
time gInt aInt gx gy gz ax ay az
0 947975062 4998 4998 0.008529 0.004348 -0.001269 0.145165 -0.034807 -9.828311
1 947976312 4996 4996 0.001431 0.005915 -0.001310 0.116024 0.020439 -9.769987
2 947983469 4998 4998 0.016173 0.007350 0.000573 0.092866 0.060234 -9.827328
3 947996546 4994 4994 -0.001588 0.000100 -0.003505 0.065213 0.016342 -9.804899
4 947995456 4995 4995 0.008024 0.012801 0.000042 0.082880 0.064342 -9.800008

Удаление ненужных данных¶

In [5]:
# The raw downloaded archive is no longer needed once the processed CSVs
# exist, so delete it to free disk space.
raw_dataset_dir = 'UAV-Realistic-Fault-Dataset'
if os.path.exists(raw_dataset_dir):
    shutil.rmtree(raw_dataset_dir)

Анализ данных¶

Вычисление частоты дискретизации¶

Частота дискретизации акселерометра и гироскопа нигде не указана, поэтому вычислим её самостоятельно.

In [6]:
# The accelerometer/gyroscope sampling rate is not documented anywhere, so
# estimate it from the mean time step between consecutive samples.
sampling_rates = []
for clas in range(5):
    for num in range(20):
        if clas == 0 and num == 19: continue                                       # class "0" has one file fewer
        # Forward-slash path works on every OS (the backslash literal was Windows-only)
        data = pd.read_csv(f"ProcessedDataset/{clas}/class_{clas}_number_{num}.csv")
        data = data.sort_values(by='time')                                         # order samples chronologically
        time_differences = (data['time'].diff().abs()).dropna()                    # gaps between neighbouring samples
        average_difference = time_differences.mean() / 10**6                       # microseconds -> seconds
        sampling_rates.append(1 / average_difference)                              # Hz for this file

print(f"Рассчитанная частота дискретизации: {round(sum(sampling_rates) / len(sampling_rates))}")
Рассчитанная частота дискретизации: 195

Построение 3D спектрограмм¶

In [7]:
def plot_3d_spectrogram(clas, window_size=256):
    """
    Plot 3D spectrograms for every sensor channel of one fault class.

    Parameters:
    clas (int): Class index whose combined CSV ("data_{clas}.csv") is plotted.
    window_size (int, optional): FFT window length in samples. Defaults to 256.

    Returns:
    None: The function displays the figures and returns nothing.

    Description:
    Reads the class CSV, applies a Blackman window to 50%-overlapping
    segments, runs an FFT on each segment and renders a 3D spectrogram
    (amplitude vs. frequency vs. window index) for each of the six
    channels (gx, gy, gz, ax, ay, az).
    """

    channels = ["gx", "gy", "gz", "ax", "ay", "az"]
    # Forward-slash path works on every OS (the backslash literal was Windows-only)
    data = pd.read_csv(f"ProcessedDataset/{clas}/data_{clas}.csv")[["time", *channels]].values
    data[:, 6] += 9.81          # remove the gravity offset from the vertical accelerometer (az)

    Fs = 195                    # sampling rate (estimated earlier in the notebook)
    N = len(data)               # number of samples
    overlap = window_size // 2  # 50% window overlap

    num_windows = (N - overlap) // (window_size - overlap)  # total number of windows
    # np.zeros, not np.empty: windows skipped below (recording gaps) would
    # otherwise keep uninitialized garbage and corrupt the plotted surface.
    amplitude = np.zeros((num_windows, window_size, 6))
    xf = np.fft.fftfreq(window_size, 1 / Fs)                # FFT bin frequencies
    window = blackman(window_size)                          # Blackman window (scipy.signal.windows.blackman)

    for i in range(num_windows):
        # Slice the signal into overlapping segments
        start = i * (window_size - overlap)
        end = start + window_size
        segment = data[start:end, :]

        # Sort by time and skip segments that span a gap between recordings
        segment = pd.DataFrame(segment, columns=["time", *channels]).sort_values(by='time')
        time_differences = (segment['time'].diff().abs()).dropna()
        if max(time_differences) > 150000: continue
        segment = segment.drop(columns=["time"]).values

        if segment.shape[0] == window_size:
            segment_windowed = segment * window[:, None]  # apply the window
            yf = np.fft.fft(segment_windowed, axis=0)     # FFT
            amplitude[i] = np.abs(yf)                     # keep the magnitudes

    # Rendering
    fig = plt.figure(figsize=(20, 14))

    for i in range(6):
        ax = fig.add_subplot(2, 3, i + 1, projection='3d')

        time_axis = np.arange(num_windows)
        frequency_axis = xf[:window_size // 2]                   # positive-frequency half only
        amplitude_channel = amplitude[:, :window_size // 2, i]
        time_axis_grid, frequency_axis_grid = np.meshgrid(time_axis, frequency_axis)

        ax.plot_surface(time_axis_grid, frequency_axis_grid, amplitude_channel.T, cmap='viridis')
        ax.set_title(f'3D Спектрограмма для {channels[i]}, class {clas}', size=12)
        ax.set_xlabel('Номер окна')
        ax.set_ylabel('Частота (Гц)')
        ax.set_zlabel('Амплитуда')

    # plt.savefig(f'3D_spectrogram_class_{clas}.png', dpi=300, bbox_inches='tight')
    plt.show()
In [8]:
for i in range(5): plot_3d_spectrogram(i)
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image

Извлечение признаков¶

In [9]:
# ЧО - частотная область, ВО - временная область
cols = ["ЧО Медиана", "ЧО Средняя", 
        "ЧО Std", "ЧО Max", "ЧО Min", 
        "ЧО 90 процентиль", "ЧО 75 процентиль", "ЧО 25 процентиль", 
        "ЧО Куртозис", "ЧО Ассиметрия", "ЧО Энергия", "ЧО Вариация",
        "ЧО Количество пиков", 
        "ЧО Средняя > 10 Гц", "ЧО Std > 10 Гц", "ЧО Энергия > 10 Гц",
        
        "ВО Медиана", "ВО Средняя", 
        "ВО Std", "ВО Max", "ВО Min", 
        "ВО 90 процентиль", "ВО 75 процентиль", "ВО 25 процентиль", 
        "ВО Куртозис", "ВО Ассиметрия", "ВО Энергия", "ВО Вариация", 
        "ВО Количество пиков", 
        "ВО Общая мощность", "ВО Средняя мощность"]

channels = ["gx", "gy", "gz", "ax", "ay", "az"]
cols = sum([[col[:3] + name + col[2:] for col in cols] for name in channels], [])
cols.append("Класс")

features_df = pd.DataFrame(columns=cols)

Fs = 195                     # Частота дискретизации
window_size = 256            # Размер окна
overlap = window_size // 2   # Наложение окон (50%)

window = blackman(window_size)                     # Окно Блэкмана-Наталла
more10hz = round(256 / Fs * 10)                    # Для частот более 10 Гц
frequencies = np.fft.fftfreq(window_size, 1 / Fs)  # Получение частот для спектра

for clas in trange(5, desc='Progress', colour='blue'):
    data = pd.read_csv(rf"ProcessedDataset\{clas}\data_{clas}.csv")[["time", *channels]].values
    num_windows = (len(data) - overlap) // (window_size - overlap)  # Количество всех окон
    
    for i in trange(num_windows, desc='Progress', colour='green'):
        # Разбивка данных на перекрывающиеся сегменты
        start = i * (window_size - overlap)
        end = start + window_size
        segment = data[start:end, :]

        # Сортировка по времени и проверка "стыков" между разными фрагментами данных
        segment = pd.DataFrame(segment, columns=["time", *channels]).sort_values(by='time')
        time_differences = (segment['time'].diff().abs()).dropna()
        if max(time_differences) > 150000: continue
        segment = segment.drop(columns=["time"]).values   

        # Наложение окна, применение БПФ и извлечение признаков
        if segment.shape[0] == window_size:
            segment_windowed = segment * window[:, None]  # Наложение окна
            yf = np.fft.fft(segment_windowed, axis=0)     # Применение БПФ
            amplitude = np.abs(yf)                        # Извлечение амплитуд # shape = (num_windows, window_size, 6)
            
            features = []
            for channel in range(amplitude.shape[1]):
                amp_half = amplitude[:overlap, channel]  # Берем только положительные частоты 
                seg = segment[:, channel]

                # Частотный спектр
                features.append(np.median(amp_half))          # Медиана
                features.append(np.mean(amp_half))            # Средняя амплитуда
                features.append(np.std(amp_half))             # Стандартное отклонение
                features.append(np.max(amp_half))             # Максимальная амплитуда
                features.append(np.min(amp_half))             # Минимальная амплитуда
                features.append(np.percentile(amp_half, 90))  # 90-й процентиль
                features.append(np.percentile(amp_half, 75))  # 75-й процентиль
                features.append(np.percentile(amp_half, 25))  # 25-й процентиль
                features.append(kurtosis(amp_half))           # Куртозис
                features.append(skew(amp_half))               # Ассиметрия
                features.append(np.trapz(amp_half ** 2))      # Энергия

                # Коэффициент вариации
                features.append(np.std(amp_half) / np.mean(amp_half))  
                
                # Количество пиков (выбросов)
                features.append(np.sum(amp_half > np.median(amp_half) + 0.5 * np.std(amp_half)))

                # Исследование частот > 10 Гц
                amp_half_10 = amp_half[more10hz:]
                features.append(np.mean(amp_half_10))        # Средняя амплитуда частот > 10 Гц
                features.append(np.std(amp_half_10))         # Стандартное отклонение частот > 10 Гц
                features.append(np.trapz(amp_half_10 ** 2))  # Энергия частот > 10 Гц
            
                # Временная область
                features.append(np.median(seg))          # Медиана
                features.append(np.mean(seg))            # Средняя амплитуда
                features.append(np.std(seg))             # Стандартное отклонение
                features.append(np.max(seg))             # Максимальная амплитуда
                features.append(np.min(seg))             # Минимальная амплитуда
                features.append(np.percentile(seg, 90))  # 90-й процентиль
                features.append(np.percentile(seg, 75))  # 75-й процентиль
                features.append(np.percentile(seg, 25))  # 25-й процентиль
                features.append(kurtosis(seg))           # Куртозис
                features.append(skew(seg))               # Ассиметрия
                features.append(np.trapz(seg ** 2))      # Энергия

                # Коэффициент вариации
                features.append(np.std(seg) / np.mean(seg))  
                
                # Количество пиков (выбросов)
                features.append(np.sum(seg > np.median(seg) + 0.5 * np.std(seg)))
                
                # Плотность мощности
                f, Pxx = welch(seg, fs=Fs, window='hann', nperseg=window_size, noverlap=overlap)  
                features.append(np.sum(Pxx[:overlap]))   # Общая мощность
                features.append(np.mean(Pxx[:overlap]))  # Средняя мощность

            # Добавляем метку класса и сохраняем извлеченные признаки
            features.append(clas)
            features_df.loc[len(features_df)] = features

features_df.to_csv(f"features.csv", index=False)
Progress:   0%|          | 0/5 [00:00<?, ?it/s]
Progress:   0%|          | 0/2623 [00:00<?, ?it/s]
Progress:   0%|          | 0/2896 [00:00<?, ?it/s]
Progress:   0%|          | 0/2817 [00:00<?, ?it/s]
Progress:   0%|          | 0/2846 [00:00<?, ?it/s]
Progress:   0%|          | 0/2803 [00:00<?, ?it/s]

Построение, тестирование и сравнение моделей¶

In [12]:
# Load the extracted features
features_df = pd.read_csv('features.csv')
X = features_df.drop(columns = ["Класс"])
y = features_df["Класс"].values

# Train/test split
random_state = 42
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=random_state)

# Yeo-Johnson power transform toward a normal distribution.
# All transformers are fit on the training set only to avoid test-set leakage.
power_transformer = PowerTransformer(method='yeo-johnson')
X_train = power_transformer.fit_transform(X_train)
X_test = power_transformer.transform(X_test)

# Standardization (zero mean, unit variance)
standard_scaler = StandardScaler()
X_train = standard_scaler.fit_transform(X_train)
X_test = standard_scaler.transform(X_test)

# Min-max normalization to [0, 1]
scaler = MinMaxScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

# Model zoo (hyperparameters found earlier via GridSearchCV)
models = {
    "K-Nearest Neighbors": KNeighborsClassifier(n_neighbors=4, leaf_size=1, p=1, metric='manhattan', weights='distance'),
    "Naive Bayes": GaussianNB(var_smoothing=0.1),
    "Support Vector Machine": SVC(C=800, gamma=0.1, class_weight='balanced', tol=0.01, probability=True, random_state=random_state),
    "Logistic Regression": LogisticRegression(C=300, solver='newton-cg', random_state=random_state),
    "Decision Tree": DecisionTreeClassifier(max_depth=30, criterion='entropy', random_state=random_state),
    "Perceptron": MLPClassifier(hidden_layer_sizes=(144, 256, 64, 16, 5), max_iter=400, random_state=random_state),
    "Random Forest": RandomForestClassifier(n_estimators=400, max_depth=20, random_state=random_state),
    "CatBoost": CatBoostClassifier(silent=True, random_state=random_state),
    # random_state added to the ensemble itself for reproducibility,
    # consistent with every other stochastic model above.
    "AdaBoost": AdaBoostClassifier(estimator=DecisionTreeClassifier(max_depth=6, random_state=random_state),
                                   n_estimators=400, learning_rate=0.1, random_state=random_state),
    "Gradient Boosting": GradientBoostingClassifier(random_state=random_state),
    "Histogram-based Gradient Boosting": HistGradientBoostingClassifier(random_state=random_state),
    "XGBoost": XGBClassifier(random_state=random_state),
    "LightGBM": LGBMClassifier(verbose=-1, random_state=random_state),

    "Stacking Classifier": StackingClassifier(
        estimators=[("LGBMClassifier", LGBMClassifier(verbose=-1, random_state=random_state)),
                    ("Random Forest", RandomForestClassifier(n_estimators=400, max_depth=20, random_state=random_state)),
                    ("XGBClassifier", XGBClassifier(random_state=random_state)),
                    ("CatBoost", CatBoostClassifier(silent=True, random_state=random_state)),
                    ("Histogram-based Gradient Boosting", HistGradientBoostingClassifier(random_state=random_state)),
                    ("K-Nearest Neighbors", KNeighborsClassifier(n_neighbors=4, leaf_size=1, p=1, metric='manhattan', weights='distance')),
                    ("Support Vector Machine", SVC(C=800, gamma=0.1, class_weight='balanced', tol=0.01, probability=True, random_state=random_state))],
        final_estimator=LogisticRegression(random_state=random_state)),
    
    "Voting Classifier": VotingClassifier(
        estimators=[("LGBMClassifier", LGBMClassifier(verbose=-1, random_state=random_state)),
                    ("Random Forest", RandomForestClassifier(n_estimators=400, max_depth=20, random_state=random_state)),
                    ("XGBClassifier", XGBClassifier(random_state=random_state)),
                    ("CatBoost", CatBoostClassifier(silent=True, random_state=random_state)),
                    ("Histogram-based Gradient Boosting", HistGradientBoostingClassifier(random_state=random_state)),
                    ("K-Nearest Neighbors", KNeighborsClassifier(n_neighbors=4, leaf_size=1, p=1, metric='manhattan', weights='distance')),
                    ("Support Vector Machine", SVC(C=800, gamma=0.1, class_weight='balanced', tol=0.01, probability=True, random_state=random_state)),
                    ("Logistic Regression", LogisticRegression(C=300, solver='newton-cg', random_state=random_state)),
                    ("Perceptron", MLPClassifier(hidden_layer_sizes=(144, 256, 64, 16, 5), max_iter=400, random_state=random_state))], 
        voting='soft')
}

# Fit and evaluate every model on the held-out test set
for model_name, model in models.items():
    model.fit(X_train, y_train)
    y_pred = model.predict(X_test)
    
    print(f"\n{model_name}:\n")
    print(classification_report(y_test, y_pred))
    print(f"Accuracy: {round(accuracy_score(y_test, y_pred) * 100, 2)} %\n")
    print("-" * 53)
K-Nearest Neighbors:

              precision    recall  f1-score   support

         0.0       0.98      0.98      0.98       528
         1.0       0.96      0.97      0.96       561
         2.0       0.93      0.92      0.92       538
         3.0       0.89      0.89      0.89       582
         4.0       0.91      0.91      0.91       550

    accuracy                           0.93      2759
   macro avg       0.93      0.93      0.93      2759
weighted avg       0.93      0.93      0.93      2759

Accuracy: 93.29 %

-----------------------------------------------------

Naive Bayes:

              precision    recall  f1-score   support

         0.0       0.63      1.00      0.77       528
         1.0       0.96      0.80      0.87       561
         2.0       0.87      0.50      0.64       538
         3.0       0.62      0.62      0.62       582
         4.0       0.75      0.77      0.76       550

    accuracy                           0.74      2759
   macro avg       0.77      0.74      0.73      2759
weighted avg       0.77      0.74      0.73      2759

Accuracy: 73.69 %

-----------------------------------------------------

Support Vector Machine:

              precision    recall  f1-score   support

         0.0       0.93      0.95      0.94       528
         1.0       0.95      0.95      0.95       561
         2.0       0.91      0.91      0.91       538
         3.0       0.89      0.88      0.89       582
         4.0       0.91      0.90      0.91       550

    accuracy                           0.92      2759
   macro avg       0.92      0.92      0.92      2759
weighted avg       0.92      0.92      0.92      2759

Accuracy: 91.88 %

-----------------------------------------------------

Logistic Regression:

              precision    recall  f1-score   support

         0.0       0.92      0.91      0.92       528
         1.0       0.90      0.92      0.91       561
         2.0       0.81      0.84      0.83       538
         3.0       0.84      0.82      0.83       582
         4.0       0.85      0.84      0.85       550

    accuracy                           0.87      2759
   macro avg       0.87      0.87      0.87      2759
weighted avg       0.87      0.87      0.87      2759

Accuracy: 86.59 %

-----------------------------------------------------

Decision Tree:

              precision    recall  f1-score   support

         0.0       0.97      0.97      0.97       528
         1.0       0.95      0.96      0.95       561
         2.0       0.89      0.89      0.89       538
         3.0       0.88      0.84      0.86       582
         4.0       0.89      0.91      0.90       550

    accuracy                           0.91      2759
   macro avg       0.91      0.91      0.91      2759
weighted avg       0.91      0.91      0.91      2759

Accuracy: 91.3 %

-----------------------------------------------------

Perceptron:

              precision    recall  f1-score   support

         0.0       0.98      0.88      0.93       528
         1.0       0.70      0.97      0.81       561
         2.0       0.88      0.87      0.87       538
         3.0       0.89      0.78      0.83       582
         4.0       0.95      0.81      0.87       550

    accuracy                           0.86      2759
   macro avg       0.88      0.86      0.86      2759
weighted avg       0.88      0.86      0.86      2759

Accuracy: 86.08 %

-----------------------------------------------------

Random Forest:

              precision    recall  f1-score   support

         0.0       0.99      0.99      0.99       528
         1.0       0.98      0.99      0.98       561
         2.0       0.95      0.95      0.95       538
         3.0       0.93      0.91      0.92       582
         4.0       0.95      0.95      0.95       550

    accuracy                           0.96      2759
   macro avg       0.96      0.96      0.96      2759
weighted avg       0.96      0.96      0.96      2759

Accuracy: 95.87 %

-----------------------------------------------------

CatBoost:

              precision    recall  f1-score   support

         0.0       1.00      0.99      0.99       528
         1.0       0.97      0.98      0.98       561
         2.0       0.95      0.95      0.95       538
         3.0       0.94      0.93      0.93       582
         4.0       0.96      0.95      0.96       550

    accuracy                           0.96      2759
   macro avg       0.96      0.96      0.96      2759
weighted avg       0.96      0.96      0.96      2759

Accuracy: 96.16 %

-----------------------------------------------------

AdaBoost:

              precision    recall  f1-score   support

         0.0       0.99      0.98      0.99       528
         1.0       0.97      0.98      0.98       561
         2.0       0.87      0.95      0.91       538
         3.0       0.78      0.89      0.83       582
         4.0       0.98      0.74      0.84       550

    accuracy                           0.91      2759
   macro avg       0.92      0.91      0.91      2759
weighted avg       0.92      0.91      0.91      2759

Accuracy: 90.72 %

-----------------------------------------------------

Gradient Boosting:

              precision    recall  f1-score   support

         0.0       0.99      0.98      0.98       528
         1.0       0.96      0.97      0.96       561
         2.0       0.93      0.93      0.93       538
         3.0       0.90      0.91      0.90       582
         4.0       0.95      0.93      0.94       550

    accuracy                           0.94      2759
   macro avg       0.94      0.94      0.94      2759
weighted avg       0.94      0.94      0.94      2759

Accuracy: 94.35 %

-----------------------------------------------------

Histogram-based Gradient Boosting:

              precision    recall  f1-score   support

         0.0       0.99      0.99      0.99       528
         1.0       0.98      0.98      0.98       561
         2.0       0.95      0.96      0.95       538
         3.0       0.94      0.93      0.93       582
         4.0       0.95      0.95      0.95       550

    accuracy                           0.96      2759
   macro avg       0.96      0.96      0.96      2759
weighted avg       0.96      0.96      0.96      2759

Accuracy: 96.23 %

-----------------------------------------------------

XGBoost:

              precision    recall  f1-score   support

         0.0       1.00      0.99      0.99       528
         1.0       0.98      0.99      0.98       561
         2.0       0.96      0.95      0.96       538
         3.0       0.93      0.93      0.93       582
         4.0       0.95      0.95      0.95       550

    accuracy                           0.96      2759
   macro avg       0.96      0.96      0.96      2759
weighted avg       0.96      0.96      0.96      2759

Accuracy: 96.27 %

-----------------------------------------------------

LightGBM:

              precision    recall  f1-score   support

         0.0       0.99      0.99      0.99       528
         1.0       0.98      0.98      0.98       561
         2.0       0.95      0.96      0.95       538
         3.0       0.93      0.93      0.93       582
         4.0       0.95      0.95      0.95       550

    accuracy                           0.96      2759
   macro avg       0.96      0.96      0.96      2759
weighted avg       0.96      0.96      0.96      2759

Accuracy: 96.19 %

-----------------------------------------------------

Stacking Classifier:

              precision    recall  f1-score   support

         0.0       0.99      0.99      0.99       528
         1.0       0.98      0.99      0.98       561
         2.0       0.96      0.96      0.96       538
         3.0       0.94      0.94      0.94       582
         4.0       0.96      0.96      0.96       550

    accuracy                           0.97      2759
   macro avg       0.97      0.97      0.97      2759
weighted avg       0.97      0.97      0.97      2759

Accuracy: 96.59 %

-----------------------------------------------------

Voting Classifier:

              precision    recall  f1-score   support

         0.0       0.99      0.99      0.99       528
         1.0       0.98      0.99      0.98       561
         2.0       0.96      0.96      0.96       538
         3.0       0.93      0.94      0.93       582
         4.0       0.96      0.95      0.96       550

    accuracy                           0.96      2759
   macro avg       0.97      0.97      0.97      2759
weighted avg       0.96      0.96      0.96      2759

Accuracy: 96.48 %

-----------------------------------------------------
In [13]:
# Сравнение моделей по метрике точности accuracy

# Compare the trained models by test-set accuracy.

accuracy_by_model = {
    name: round(accuracy_score(y_test, model.predict(X_test)) * 100, 3)
    for name, model in models.items()
}

models_accuracy_df = (
    pd.DataFrame({'Модель': list(accuracy_by_model.keys()),
                  'Точность': list(accuracy_by_model.values())})
    .sort_values(by='Точность', ascending=False)
)

fig, ax = plt.subplots(figsize=(8, 6))
ax.barh(models_accuracy_df['Модель'], models_accuracy_df['Точность'], color='skyblue')
ax.set_xlabel('Точность (%)')
ax.set_title('Сравнение точности моделей')
ax.invert_yaxis()     # best model on top
ax.grid(axis='x')
# plt.savefig('models_accuracy_comparison.png', dpi=1200, bbox_inches='tight')
plt.show()
No description has been provided for this image
In [14]:
# Оценка моделей по четырем метрикам точности

# Score every model on four metrics (weighted to account for class sizes).

models_acc_dict = {}
for name, model in models.items():
    y_pred = model.predict(X_test)
    models_acc_dict[name] = {
        'Accuracy': round(accuracy_score(y_test, y_pred) * 100, 3),
        'Precision': round(precision_score(y_test, y_pred, average='weighted') * 100, 3),
        'Recall': round(recall_score(y_test, y_pred, average='weighted') * 100, 3),
        'F1 Score': round(f1_score(y_test, y_pred, average='weighted') * 100, 3),
    }

# One row per model, best accuracy first; the bare expression renders the table.
models_acc_df = (
    pd.DataFrame(models_acc_dict)
    .T
    .sort_values(by='Accuracy', ascending=False)
)
models_acc_df
Out[14]:
Accuracy Precision Recall F1 Score
Stacking Classifier 96.593 96.595 96.593 96.593
Voting Classifier 96.484 96.486 96.484 96.484
XGBoost 96.267 96.270 96.267 96.267
Histogram-based Gradient Boosting 96.231 96.229 96.231 96.230
LightGBM 96.194 96.199 96.194 96.197
CatBoost 96.158 96.155 96.158 96.154
Random Forest 95.868 95.857 95.868 95.860
Gradient Boosting 94.346 94.354 94.346 94.347
K-Nearest Neighbors 93.295 93.292 93.295 93.291
Support Vector Machine 91.881 91.874 91.881 91.874
Decision Tree 91.301 91.281 91.301 91.280
AdaBoost 90.721 91.589 90.721 90.696
Logistic Regression 86.589 86.625 86.589 86.592
Perceptron 86.082 87.877 86.082 86.311
Naive Bayes 73.686 76.717 73.686 73.304
In [15]:
# Построение матрицы ошибок для Stacking Classifier

# Confusion matrix for the best model (Stacking Classifier).

stacking_predictions = models["Stacking Classifier"].predict(X_test)

plt.figure(figsize=(6, 5))
sns.heatmap(
    confusion_matrix(y_test, stacking_predictions),
    annot=True,
    cmap=sns.color_palette("RdYlGn", n_colors=1000),
    center=0,
    fmt='d',
)

plt.title('Матрица ошибок для Stacking Classifier')
# plt.savefig('stacking_сlassifier_confusion_matrix.png', dpi=1200, bbox_inches='tight')
plt.show()
No description has been provided for this image
In [16]:
# Расчет точности с учетом ошибки в классе не более чем на 1

# Accuracy when a prediction off by at most one class is counted as correct
# (adjacent fault-severity classes are easy to confuse).

y_pred = models["Stacking Classifier"].predict(X_test)

correct_predictions = sum(
    1 for true_class, predicted_class in zip(y_test, y_pred)
    if abs(true_class - predicted_class) <= 1
)

new_accuracy = correct_predictions / len(y_test)
print(f"Точность с учетом ошибки в классе не более чем на 1: {round(new_accuracy * 100, 2)} %")
Точность с учетом ошибки в классе не более чем на 1: 99.71 %
In [17]:
# One-vs-rest ROC curves for the Stacking Classifier.

# Fix: the local variable was previously named `voting_model`, which was
# misleading — it actually holds the Stacking Classifier.
stacking_model = models["Stacking Classifier"]
y_score = stacking_model.predict_proba(X_test)  # per-class probabilities, shape (n_samples, n_classes)
classes = np.unique(y)

# One ROC curve per class, treating that class as the positive label.
plt.figure(figsize=(10, 8))
for i in range(len(classes)):
    fpr, tpr, _ = roc_curve(y_test, y_score[:, i], pos_label=classes[i])
    roc_auc = auc(fpr, tpr)
    plt.plot(fpr, tpr, label=f'ROC curve for class {classes[i]} (area = {roc_auc:.5f})')

plt.plot([0, 1], [0, 1], 'k--')  # chance-level diagonal for reference
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver Operating Characteristic (ROC) Curve')
plt.legend(loc='lower right')
plt.grid()
# plt.savefig('stacking_classifier_roc_curve.png', dpi=1200, bbox_inches='tight')
plt.show()
No description has been provided for this image

Пример использования GridSearchCV для поиска оптимальных параметров¶

In [18]:
# Hyper-parameter grid for KNN: GridSearchCV evaluates every combination
# with 5-fold cross-validation and keeps the one with the best accuracy.
param_grid = {
    'n_neighbors': [1, 2, 3, 4, 5],
    'leaf_size': [1, 2, 3],
    'metric': ['manhattan', 'euclidean'],
    'weights': ['uniform', 'distance'],
    # NOTE(review): `p` only affects the 'minkowski' metric; with
    # 'manhattan'/'euclidean' it is ignored, so this entry doubles the grid
    # size without adding distinct candidates — consider removing it.
    'p': [1, 2],
}

# n_jobs=-1 runs the candidate fits on all available CPU cores.
grid_search = GridSearchCV(estimator=KNeighborsClassifier(), param_grid=param_grid, cv=5, scoring='accuracy', n_jobs=-1)
grid_search.fit(X_train, y_train)

print("Лучшие параметры: ", grid_search.best_params_)
print("Лучшая оценка перекрестной проверки: ", grid_search.best_score_)
Лучшие параметры:  {'leaf_size': 1, 'metric': 'manhattan', 'n_neighbors': 3, 'p': 1, 'weights': 'distance'}
Лучшая оценка перекрестной проверки:  0.9236128622367566

Создание, обучение и тестирование нейронной сети¶

In [19]:
# Reproducibility: seed both NumPy and TensorFlow before building/training.
np.random.seed(random_state)
tf.random.set_seed(random_state)

# Reload the feature table; "Класс" is the target column.
features_df = pd.read_csv('features.csv')
X = features_df.drop(columns=["Класс"])
y = features_df["Класс"].values
y = to_categorical(y)  # one-hot encode integer class labels

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=random_state)

# Scale features to [0, 1]; fit only on the training split to avoid leakage.
scaler = MinMaxScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

# Derive layer sizes from the data instead of hard-coding them
# (previously fixed at 186 input features and 5 output classes).
n_features = X_train.shape[1]
n_classes = y_train.shape[1]

# Fully connected classifier: 4 hidden ReLU layers with dropout,
# softmax output over the classes.
model = Sequential()
model.add(Dense(128, input_shape=(n_features,), activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(512, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(64, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(n_classes, activation='softmax'))

# Stop when validation loss stops improving and roll back to the best weights.
early_stopping = EarlyStopping(monitor='val_loss', patience=200, verbose=1, restore_best_weights=True)

model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
trained_model = model.fit(X_train, y_train, epochs=1000, batch_size=128, validation_split=0.2, callbacks=[early_stopping])

# Hold-out evaluation: convert one-hot vectors back to class indices.
y_pred = model.predict(X_test, verbose=0)
y_pred_classes = np.argmax(y_pred, axis=1)
y_true = np.argmax(y_test, axis=1)
Epoch 1/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 2s 6ms/step - accuracy: 0.4889 - loss: 1.1808 - val_accuracy: 0.8066 - val_loss: 0.4973
Epoch 2/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.7538 - loss: 0.5807 - val_accuracy: 0.8320 - val_loss: 0.4059
Epoch 3/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.7911 - loss: 0.4908 - val_accuracy: 0.8456 - val_loss: 0.3838
Epoch 4/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8103 - loss: 0.4466 - val_accuracy: 0.8546 - val_loss: 0.3556
Epoch 5/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8165 - loss: 0.4313 - val_accuracy: 0.8501 - val_loss: 0.3522
Epoch 6/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8297 - loss: 0.4013 - val_accuracy: 0.8379 - val_loss: 0.3868
Epoch 7/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8363 - loss: 0.3996 - val_accuracy: 0.8546 - val_loss: 0.3420
Epoch 8/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8283 - loss: 0.4052 - val_accuracy: 0.8578 - val_loss: 0.3369
Epoch 9/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8462 - loss: 0.3759 - val_accuracy: 0.8519 - val_loss: 0.3306
Epoch 10/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8292 - loss: 0.3982 - val_accuracy: 0.8582 - val_loss: 0.3271
Epoch 11/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8422 - loss: 0.3698 - val_accuracy: 0.8519 - val_loss: 0.3462
Epoch 12/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8409 - loss: 0.3678 - val_accuracy: 0.8619 - val_loss: 0.3159
Epoch 13/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8475 - loss: 0.3573 - val_accuracy: 0.8601 - val_loss: 0.3254
Epoch 14/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8351 - loss: 0.3835 - val_accuracy: 0.8510 - val_loss: 0.3396
Epoch 15/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8497 - loss: 0.3493 - val_accuracy: 0.8655 - val_loss: 0.3143
Epoch 16/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8560 - loss: 0.3434 - val_accuracy: 0.8623 - val_loss: 0.3251
Epoch 17/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8517 - loss: 0.3466 - val_accuracy: 0.8605 - val_loss: 0.3111
Epoch 18/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8530 - loss: 0.3484 - val_accuracy: 0.8605 - val_loss: 0.3197
Epoch 19/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8551 - loss: 0.3439 - val_accuracy: 0.8619 - val_loss: 0.3103
Epoch 20/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8531 - loss: 0.3401 - val_accuracy: 0.8537 - val_loss: 0.3445
Epoch 21/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8528 - loss: 0.3550 - val_accuracy: 0.8551 - val_loss: 0.3165
Epoch 22/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8507 - loss: 0.3432 - val_accuracy: 0.8601 - val_loss: 0.3168
Epoch 23/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8506 - loss: 0.3519 - val_accuracy: 0.8623 - val_loss: 0.3203
Epoch 24/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8563 - loss: 0.3413 - val_accuracy: 0.8537 - val_loss: 0.3479
Epoch 25/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8622 - loss: 0.3258 - val_accuracy: 0.8673 - val_loss: 0.3053
Epoch 26/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8533 - loss: 0.3359 - val_accuracy: 0.8714 - val_loss: 0.3067
Epoch 27/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8628 - loss: 0.3278 - val_accuracy: 0.8700 - val_loss: 0.3050
Epoch 28/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8675 - loss: 0.3193 - val_accuracy: 0.8700 - val_loss: 0.3072
Epoch 29/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8659 - loss: 0.3265 - val_accuracy: 0.8546 - val_loss: 0.3513
Epoch 30/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8573 - loss: 0.3308 - val_accuracy: 0.8623 - val_loss: 0.3230
Epoch 31/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8687 - loss: 0.3204 - val_accuracy: 0.8587 - val_loss: 0.3321
Epoch 32/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8648 - loss: 0.3340 - val_accuracy: 0.8736 - val_loss: 0.2982
Epoch 33/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8667 - loss: 0.3252 - val_accuracy: 0.8569 - val_loss: 0.3348
Epoch 34/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8659 - loss: 0.3162 - val_accuracy: 0.8759 - val_loss: 0.3021
Epoch 35/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8771 - loss: 0.3081 - val_accuracy: 0.8714 - val_loss: 0.3094
Epoch 36/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8673 - loss: 0.3106 - val_accuracy: 0.8610 - val_loss: 0.3373
Epoch 37/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8690 - loss: 0.3098 - val_accuracy: 0.8705 - val_loss: 0.3167
Epoch 38/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8624 - loss: 0.3225 - val_accuracy: 0.8605 - val_loss: 0.3218
Epoch 39/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8729 - loss: 0.3067 - val_accuracy: 0.8659 - val_loss: 0.3086
Epoch 40/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8718 - loss: 0.3057 - val_accuracy: 0.8623 - val_loss: 0.3138
Epoch 41/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8726 - loss: 0.3004 - val_accuracy: 0.8777 - val_loss: 0.2893
Epoch 42/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8697 - loss: 0.3123 - val_accuracy: 0.8741 - val_loss: 0.3055
Epoch 43/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8726 - loss: 0.3106 - val_accuracy: 0.8678 - val_loss: 0.2934
Epoch 44/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8668 - loss: 0.3100 - val_accuracy: 0.8591 - val_loss: 0.3417
Epoch 45/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8714 - loss: 0.3061 - val_accuracy: 0.8696 - val_loss: 0.2955
Epoch 46/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8715 - loss: 0.2977 - val_accuracy: 0.8632 - val_loss: 0.3398
Epoch 47/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8760 - loss: 0.2994 - val_accuracy: 0.8619 - val_loss: 0.3272
Epoch 48/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8708 - loss: 0.3018 - val_accuracy: 0.8705 - val_loss: 0.3051
Epoch 49/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8750 - loss: 0.2983 - val_accuracy: 0.8682 - val_loss: 0.3036
Epoch 50/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8765 - loss: 0.2953 - val_accuracy: 0.8718 - val_loss: 0.3019
Epoch 51/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8808 - loss: 0.2902 - val_accuracy: 0.8564 - val_loss: 0.3412
Epoch 52/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8721 - loss: 0.3054 - val_accuracy: 0.8777 - val_loss: 0.3056
Epoch 53/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8766 - loss: 0.2913 - val_accuracy: 0.8687 - val_loss: 0.2964
Epoch 54/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8803 - loss: 0.2966 - val_accuracy: 0.8795 - val_loss: 0.2856
Epoch 55/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.8779 - loss: 0.2948 - val_accuracy: 0.8777 - val_loss: 0.3007
Epoch 56/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8752 - loss: 0.2968 - val_accuracy: 0.8863 - val_loss: 0.2859
Epoch 57/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8785 - loss: 0.2925 - val_accuracy: 0.8732 - val_loss: 0.2889
Epoch 58/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8736 - loss: 0.2979 - val_accuracy: 0.8741 - val_loss: 0.3049
Epoch 59/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8776 - loss: 0.2881 - val_accuracy: 0.8678 - val_loss: 0.3408
Epoch 60/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8806 - loss: 0.2804 - val_accuracy: 0.8723 - val_loss: 0.2967
Epoch 61/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8791 - loss: 0.2882 - val_accuracy: 0.8813 - val_loss: 0.2957
Epoch 62/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8797 - loss: 0.2806 - val_accuracy: 0.8723 - val_loss: 0.3031
Epoch 63/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8852 - loss: 0.2779 - val_accuracy: 0.8691 - val_loss: 0.3387
Epoch 64/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8767 - loss: 0.2861 - val_accuracy: 0.8483 - val_loss: 0.4013
Epoch 65/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8769 - loss: 0.2936 - val_accuracy: 0.8664 - val_loss: 0.3355
Epoch 66/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8779 - loss: 0.2856 - val_accuracy: 0.8759 - val_loss: 0.3049
Epoch 67/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8796 - loss: 0.2779 - val_accuracy: 0.8764 - val_loss: 0.2857
Epoch 68/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8827 - loss: 0.2914 - val_accuracy: 0.8755 - val_loss: 0.2906
Epoch 69/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8800 - loss: 0.2799 - val_accuracy: 0.8750 - val_loss: 0.2853
Epoch 70/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8830 - loss: 0.2718 - val_accuracy: 0.8705 - val_loss: 0.3065
Epoch 71/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8822 - loss: 0.2814 - val_accuracy: 0.8723 - val_loss: 0.3031
Epoch 72/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8840 - loss: 0.2685 - val_accuracy: 0.8791 - val_loss: 0.2806
Epoch 73/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8802 - loss: 0.2789 - val_accuracy: 0.8773 - val_loss: 0.2985
Epoch 74/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8821 - loss: 0.2761 - val_accuracy: 0.8804 - val_loss: 0.2881
Epoch 75/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8783 - loss: 0.2835 - val_accuracy: 0.8822 - val_loss: 0.2868
Epoch 76/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8835 - loss: 0.2809 - val_accuracy: 0.8687 - val_loss: 0.3342
Epoch 77/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8822 - loss: 0.2784 - val_accuracy: 0.8755 - val_loss: 0.3038
Epoch 78/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8841 - loss: 0.2752 - val_accuracy: 0.8727 - val_loss: 0.3198
Epoch 79/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8792 - loss: 0.2807 - val_accuracy: 0.8755 - val_loss: 0.3011
Epoch 80/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8863 - loss: 0.2695 - val_accuracy: 0.8696 - val_loss: 0.3265
Epoch 81/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8802 - loss: 0.2752 - val_accuracy: 0.8836 - val_loss: 0.2855
Epoch 82/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8852 - loss: 0.2710 - val_accuracy: 0.8813 - val_loss: 0.2846
Epoch 83/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8890 - loss: 0.2719 - val_accuracy: 0.8813 - val_loss: 0.2967
Epoch 84/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.8862 - loss: 0.2807 - val_accuracy: 0.8786 - val_loss: 0.3001
Epoch 85/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.8868 - loss: 0.2710 - val_accuracy: 0.8759 - val_loss: 0.2911
Epoch 86/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8765 - loss: 0.2857 - val_accuracy: 0.8813 - val_loss: 0.2923
Epoch 87/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8833 - loss: 0.2737 - val_accuracy: 0.8827 - val_loss: 0.2971
Epoch 88/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8848 - loss: 0.2751 - val_accuracy: 0.8859 - val_loss: 0.2705
Epoch 89/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8843 - loss: 0.2697 - val_accuracy: 0.8714 - val_loss: 0.3201
Epoch 90/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8868 - loss: 0.2783 - val_accuracy: 0.8854 - val_loss: 0.2806
Epoch 91/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8889 - loss: 0.2675 - val_accuracy: 0.8650 - val_loss: 0.3145
Epoch 92/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8812 - loss: 0.2732 - val_accuracy: 0.8782 - val_loss: 0.3104
Epoch 93/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8869 - loss: 0.2628 - val_accuracy: 0.8764 - val_loss: 0.2981
Epoch 94/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8840 - loss: 0.2787 - val_accuracy: 0.8936 - val_loss: 0.2652
Epoch 95/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8924 - loss: 0.2532 - val_accuracy: 0.8841 - val_loss: 0.2832
Epoch 96/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8954 - loss: 0.2521 - val_accuracy: 0.8818 - val_loss: 0.2795
Epoch 97/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8895 - loss: 0.2568 - val_accuracy: 0.8700 - val_loss: 0.3461
Epoch 98/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8886 - loss: 0.2678 - val_accuracy: 0.8809 - val_loss: 0.2891
Epoch 99/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8911 - loss: 0.2641 - val_accuracy: 0.8764 - val_loss: 0.3056
Epoch 100/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8864 - loss: 0.2631 - val_accuracy: 0.8895 - val_loss: 0.2715
Epoch 101/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8938 - loss: 0.2483 - val_accuracy: 0.8804 - val_loss: 0.3002
Epoch 102/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8898 - loss: 0.2639 - val_accuracy: 0.8813 - val_loss: 0.2922
Epoch 103/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8896 - loss: 0.2632 - val_accuracy: 0.8777 - val_loss: 0.3127
Epoch 104/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8893 - loss: 0.2569 - val_accuracy: 0.8899 - val_loss: 0.2594
Epoch 105/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8893 - loss: 0.2607 - val_accuracy: 0.8868 - val_loss: 0.2907
Epoch 106/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8935 - loss: 0.2493 - val_accuracy: 0.8890 - val_loss: 0.2627
Epoch 107/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8929 - loss: 0.2506 - val_accuracy: 0.8723 - val_loss: 0.3380
Epoch 108/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.8951 - loss: 0.2515 - val_accuracy: 0.8773 - val_loss: 0.2847
Epoch 109/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8947 - loss: 0.2486 - val_accuracy: 0.8841 - val_loss: 0.2734
Epoch 110/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8894 - loss: 0.2629 - val_accuracy: 0.8931 - val_loss: 0.2636
Epoch 111/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8948 - loss: 0.2529 - val_accuracy: 0.8922 - val_loss: 0.2563
Epoch 112/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8951 - loss: 0.2494 - val_accuracy: 0.8836 - val_loss: 0.2883
Epoch 113/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8963 - loss: 0.2514 - val_accuracy: 0.8786 - val_loss: 0.3075
Epoch 114/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8894 - loss: 0.2720 - val_accuracy: 0.8827 - val_loss: 0.2969
Epoch 115/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8926 - loss: 0.2478 - val_accuracy: 0.8909 - val_loss: 0.2601
Epoch 116/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8973 - loss: 0.2424 - val_accuracy: 0.8927 - val_loss: 0.2697
Epoch 117/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8998 - loss: 0.2444 - val_accuracy: 0.8886 - val_loss: 0.2677
Epoch 118/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8938 - loss: 0.2524 - val_accuracy: 0.8841 - val_loss: 0.2815
Epoch 119/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8983 - loss: 0.2328 - val_accuracy: 0.8872 - val_loss: 0.2846
Epoch 120/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8976 - loss: 0.2466 - val_accuracy: 0.8918 - val_loss: 0.2648
Epoch 121/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8978 - loss: 0.2460 - val_accuracy: 0.8832 - val_loss: 0.2821
Epoch 122/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8965 - loss: 0.2458 - val_accuracy: 0.8881 - val_loss: 0.2591
Epoch 123/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8967 - loss: 0.2501 - val_accuracy: 0.8868 - val_loss: 0.2712
Epoch 124/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8924 - loss: 0.2497 - val_accuracy: 0.8881 - val_loss: 0.2880
Epoch 125/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8930 - loss: 0.2473 - val_accuracy: 0.8800 - val_loss: 0.2925
Epoch 126/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8965 - loss: 0.2401 - val_accuracy: 0.8895 - val_loss: 0.2678
Epoch 127/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9001 - loss: 0.2373 - val_accuracy: 0.8886 - val_loss: 0.2725
Epoch 128/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9036 - loss: 0.2346 - val_accuracy: 0.8822 - val_loss: 0.2867
Epoch 129/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9036 - loss: 0.2323 - val_accuracy: 0.8872 - val_loss: 0.2782
Epoch 130/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9043 - loss: 0.2336 - val_accuracy: 0.8818 - val_loss: 0.2938
Epoch 131/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8999 - loss: 0.2375 - val_accuracy: 0.8795 - val_loss: 0.2967
Epoch 132/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9001 - loss: 0.2388 - val_accuracy: 0.8904 - val_loss: 0.2709
Epoch 133/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9032 - loss: 0.2276 - val_accuracy: 0.8764 - val_loss: 0.3278
Epoch 134/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9018 - loss: 0.2336 - val_accuracy: 0.8745 - val_loss: 0.3106
Epoch 135/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8996 - loss: 0.2388 - val_accuracy: 0.8813 - val_loss: 0.2886
Epoch 136/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8994 - loss: 0.2384 - val_accuracy: 0.8958 - val_loss: 0.2546
Epoch 137/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9040 - loss: 0.2279 - val_accuracy: 0.8818 - val_loss: 0.2886
Epoch 138/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8998 - loss: 0.2398 - val_accuracy: 0.8773 - val_loss: 0.3024
Epoch 139/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8965 - loss: 0.2452 - val_accuracy: 0.8895 - val_loss: 0.2718
Epoch 140/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9044 - loss: 0.2298 - val_accuracy: 0.8972 - val_loss: 0.2709
Epoch 141/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9069 - loss: 0.2228 - val_accuracy: 0.8868 - val_loss: 0.2912
Epoch 142/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8976 - loss: 0.2475 - val_accuracy: 0.8836 - val_loss: 0.2886
Epoch 143/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9007 - loss: 0.2432 - val_accuracy: 0.8863 - val_loss: 0.2851
Epoch 144/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9028 - loss: 0.2325 - val_accuracy: 0.8904 - val_loss: 0.2741
Epoch 145/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9058 - loss: 0.2351 - val_accuracy: 0.8967 - val_loss: 0.2563
Epoch 146/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9096 - loss: 0.2319 - val_accuracy: 0.8986 - val_loss: 0.2514
Epoch 147/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9000 - loss: 0.2416 - val_accuracy: 0.8863 - val_loss: 0.2879
Epoch 148/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9019 - loss: 0.2352 - val_accuracy: 0.8967 - val_loss: 0.2644
Epoch 149/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9055 - loss: 0.2287 - val_accuracy: 0.8836 - val_loss: 0.3212
Epoch 150/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9022 - loss: 0.2296 - val_accuracy: 0.8909 - val_loss: 0.2861
Epoch 151/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9078 - loss: 0.2209 - val_accuracy: 0.8981 - val_loss: 0.2499
Epoch 152/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9067 - loss: 0.2190 - val_accuracy: 0.9013 - val_loss: 0.2451
Epoch 153/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9131 - loss: 0.2174 - val_accuracy: 0.8976 - val_loss: 0.2595
Epoch 154/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.8999 - loss: 0.2294 - val_accuracy: 0.8863 - val_loss: 0.2768
Epoch 155/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9074 - loss: 0.2143 - val_accuracy: 0.8972 - val_loss: 0.2679
Epoch 156/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9062 - loss: 0.2262 - val_accuracy: 0.8904 - val_loss: 0.2731
Epoch 157/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9076 - loss: 0.2218 - val_accuracy: 0.8940 - val_loss: 0.2918
Epoch 158/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9082 - loss: 0.2196 - val_accuracy: 0.8899 - val_loss: 0.2954
Epoch 159/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9097 - loss: 0.2222 - val_accuracy: 0.8700 - val_loss: 0.3434
Epoch 160/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9103 - loss: 0.2221 - val_accuracy: 0.8972 - val_loss: 0.2579
Epoch 161/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9081 - loss: 0.2125 - val_accuracy: 0.8954 - val_loss: 0.2810
Epoch 162/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9105 - loss: 0.2165 - val_accuracy: 0.8705 - val_loss: 0.3455
Epoch 163/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9041 - loss: 0.2370 - val_accuracy: 0.8909 - val_loss: 0.2815
Epoch 164/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9104 - loss: 0.2190 - val_accuracy: 0.8909 - val_loss: 0.2763
Epoch 165/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9085 - loss: 0.2211 - val_accuracy: 0.8895 - val_loss: 0.2752
Epoch 166/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9120 - loss: 0.2157 - val_accuracy: 0.8909 - val_loss: 0.2934
Epoch 167/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9118 - loss: 0.2120 - val_accuracy: 0.8922 - val_loss: 0.2919
Epoch 168/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9080 - loss: 0.2229 - val_accuracy: 0.8999 - val_loss: 0.2602
Epoch 169/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9198 - loss: 0.2056 - val_accuracy: 0.8945 - val_loss: 0.2795
Epoch 170/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9197 - loss: 0.2009 - val_accuracy: 0.8863 - val_loss: 0.3091
Epoch 171/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9143 - loss: 0.2139 - val_accuracy: 0.9035 - val_loss: 0.2605
Epoch 172/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9123 - loss: 0.2094 - val_accuracy: 0.9004 - val_loss: 0.2616
Epoch 173/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9170 - loss: 0.2018 - val_accuracy: 0.8995 - val_loss: 0.2727
Epoch 174/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9138 - loss: 0.2112 - val_accuracy: 0.8981 - val_loss: 0.2667
Epoch 175/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9194 - loss: 0.2015 - val_accuracy: 0.9004 - val_loss: 0.2897
Epoch 176/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9192 - loss: 0.2006 - val_accuracy: 0.8972 - val_loss: 0.2951
Epoch 177/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9118 - loss: 0.2047 - val_accuracy: 0.9013 - val_loss: 0.2755
Epoch 178/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9166 - loss: 0.1985 - val_accuracy: 0.8786 - val_loss: 0.3701
Epoch 179/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9119 - loss: 0.2090 - val_accuracy: 0.9081 - val_loss: 0.2494
Epoch 180/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9174 - loss: 0.2023 - val_accuracy: 0.8927 - val_loss: 0.2955
Epoch 181/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9177 - loss: 0.2049 - val_accuracy: 0.8918 - val_loss: 0.3023
Epoch 182/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9224 - loss: 0.1926 - val_accuracy: 0.8895 - val_loss: 0.3134
Epoch 183/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9166 - loss: 0.2085 - val_accuracy: 0.9085 - val_loss: 0.2384
Epoch 184/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9186 - loss: 0.2010 - val_accuracy: 0.9017 - val_loss: 0.2595
Epoch 185/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9210 - loss: 0.2032 - val_accuracy: 0.8904 - val_loss: 0.2814
Epoch 186/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9217 - loss: 0.2065 - val_accuracy: 0.9099 - val_loss: 0.2655
Epoch 187/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9271 - loss: 0.1905 - val_accuracy: 0.9026 - val_loss: 0.2687
Epoch 188/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9219 - loss: 0.1936 - val_accuracy: 0.8809 - val_loss: 0.3246
Epoch 189/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9150 - loss: 0.1986 - val_accuracy: 0.9103 - val_loss: 0.2446
Epoch 190/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9202 - loss: 0.1969 - val_accuracy: 0.8922 - val_loss: 0.3071
Epoch 191/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9209 - loss: 0.2020 - val_accuracy: 0.9017 - val_loss: 0.2608
Epoch 192/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9257 - loss: 0.1872 - val_accuracy: 0.8909 - val_loss: 0.2979
Epoch 193/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9221 - loss: 0.1924 - val_accuracy: 0.8881 - val_loss: 0.2921
Epoch 194/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9165 - loss: 0.2031 - val_accuracy: 0.9085 - val_loss: 0.2643
Epoch 195/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9225 - loss: 0.1938 - val_accuracy: 0.9044 - val_loss: 0.2558
Epoch 196/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9193 - loss: 0.1948 - val_accuracy: 0.8981 - val_loss: 0.2697
Epoch 197/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9209 - loss: 0.1951 - val_accuracy: 0.9026 - val_loss: 0.2755
Epoch 198/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9108 - loss: 0.2163 - val_accuracy: 0.9031 - val_loss: 0.2628
Epoch 199/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9264 - loss: 0.1852 - val_accuracy: 0.9072 - val_loss: 0.2471
Epoch 200/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9222 - loss: 0.1885 - val_accuracy: 0.9130 - val_loss: 0.2581
Epoch 201/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9211 - loss: 0.2095 - val_accuracy: 0.9008 - val_loss: 0.2876
Epoch 202/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9290 - loss: 0.1829 - val_accuracy: 0.8990 - val_loss: 0.3002
Epoch 203/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9203 - loss: 0.1910 - val_accuracy: 0.8895 - val_loss: 0.3043
Epoch 204/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9319 - loss: 0.1773 - val_accuracy: 0.9035 - val_loss: 0.2774
Epoch 205/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9264 - loss: 0.1840 - val_accuracy: 0.8931 - val_loss: 0.2965
Epoch 206/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9187 - loss: 0.1956 - val_accuracy: 0.8986 - val_loss: 0.3133
Epoch 207/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9327 - loss: 0.1728 - val_accuracy: 0.9090 - val_loss: 0.2765
Epoch 208/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9252 - loss: 0.1865 - val_accuracy: 0.9026 - val_loss: 0.2685
Epoch 209/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9261 - loss: 0.1854 - val_accuracy: 0.9044 - val_loss: 0.2768
Epoch 210/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9286 - loss: 0.1886 - val_accuracy: 0.9049 - val_loss: 0.2576
Epoch 211/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9196 - loss: 0.2042 - val_accuracy: 0.9044 - val_loss: 0.2694
Epoch 212/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9283 - loss: 0.1873 - val_accuracy: 0.9040 - val_loss: 0.2799
Epoch 213/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9215 - loss: 0.1992 - val_accuracy: 0.8881 - val_loss: 0.3211
Epoch 214/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9210 - loss: 0.1952 - val_accuracy: 0.9117 - val_loss: 0.2482
Epoch 215/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9239 - loss: 0.1872 - val_accuracy: 0.8981 - val_loss: 0.3241
Epoch 216/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9306 - loss: 0.1835 - val_accuracy: 0.9049 - val_loss: 0.2919
Epoch 217/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9253 - loss: 0.1914 - val_accuracy: 0.9153 - val_loss: 0.2421
Epoch 218/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9306 - loss: 0.1787 - val_accuracy: 0.9108 - val_loss: 0.2607
Epoch 219/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9314 - loss: 0.1812 - val_accuracy: 0.9067 - val_loss: 0.2820
Epoch 220/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9258 - loss: 0.1948 - val_accuracy: 0.9094 - val_loss: 0.2566
Epoch 221/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9318 - loss: 0.1750 - val_accuracy: 0.8986 - val_loss: 0.2932
Epoch 222/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9259 - loss: 0.1842 - val_accuracy: 0.9126 - val_loss: 0.2337
Epoch 223/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9293 - loss: 0.1771 - val_accuracy: 0.9035 - val_loss: 0.2675
Epoch 224/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9296 - loss: 0.1739 - val_accuracy: 0.9126 - val_loss: 0.2448
Epoch 225/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9296 - loss: 0.1848 - val_accuracy: 0.8999 - val_loss: 0.2840
Epoch 226/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9217 - loss: 0.1955 - val_accuracy: 0.9022 - val_loss: 0.2816
Epoch 227/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9290 - loss: 0.1821 - val_accuracy: 0.9117 - val_loss: 0.2435
Epoch 228/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9278 - loss: 0.1833 - val_accuracy: 0.9135 - val_loss: 0.2594
Epoch 229/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9256 - loss: 0.1911 - val_accuracy: 0.9099 - val_loss: 0.2691
Epoch 230/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9333 - loss: 0.1698 - val_accuracy: 0.9153 - val_loss: 0.2545
Epoch 231/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9351 - loss: 0.1680 - val_accuracy: 0.9072 - val_loss: 0.2872
Epoch 232/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9316 - loss: 0.1790 - val_accuracy: 0.9135 - val_loss: 0.2557
Epoch 233/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9294 - loss: 0.1773 - val_accuracy: 0.9022 - val_loss: 0.2830
Epoch 234/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9259 - loss: 0.1839 - val_accuracy: 0.9194 - val_loss: 0.2252
Epoch 235/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9393 - loss: 0.1605 - val_accuracy: 0.8940 - val_loss: 0.3329
Epoch 236/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9299 - loss: 0.1846 - val_accuracy: 0.8918 - val_loss: 0.3170
Epoch 237/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9350 - loss: 0.1750 - val_accuracy: 0.9158 - val_loss: 0.2404
Epoch 238/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9298 - loss: 0.1721 - val_accuracy: 0.9062 - val_loss: 0.2729
Epoch 239/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9291 - loss: 0.1792 - val_accuracy: 0.9094 - val_loss: 0.2694
Epoch 240/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9325 - loss: 0.1647 - val_accuracy: 0.9031 - val_loss: 0.2724
Epoch 241/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9340 - loss: 0.1709 - val_accuracy: 0.9017 - val_loss: 0.2854
Epoch 242/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9332 - loss: 0.1714 - val_accuracy: 0.9130 - val_loss: 0.2560
Epoch 243/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9338 - loss: 0.1781 - val_accuracy: 0.8986 - val_loss: 0.3394
Epoch 244/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9348 - loss: 0.1664 - val_accuracy: 0.9198 - val_loss: 0.2439
Epoch 245/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9291 - loss: 0.1709 - val_accuracy: 0.9090 - val_loss: 0.2930
Epoch 246/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9328 - loss: 0.1748 - val_accuracy: 0.9094 - val_loss: 0.2757
Epoch 247/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9388 - loss: 0.1618 - val_accuracy: 0.9121 - val_loss: 0.2721
Epoch 248/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9330 - loss: 0.1748 - val_accuracy: 0.9149 - val_loss: 0.2410
Epoch 249/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9341 - loss: 0.1718 - val_accuracy: 0.9149 - val_loss: 0.2490
Epoch 250/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9321 - loss: 0.1767 - val_accuracy: 0.9185 - val_loss: 0.2501
Epoch 251/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9332 - loss: 0.1650 - val_accuracy: 0.9099 - val_loss: 0.2660
Epoch 252/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9341 - loss: 0.1702 - val_accuracy: 0.9094 - val_loss: 0.2606
Epoch 253/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9290 - loss: 0.1789 - val_accuracy: 0.9226 - val_loss: 0.2347
Epoch 254/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9405 - loss: 0.1572 - val_accuracy: 0.9144 - val_loss: 0.2610
Epoch 255/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9334 - loss: 0.1723 - val_accuracy: 0.9253 - val_loss: 0.2113
Epoch 256/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9397 - loss: 0.1662 - val_accuracy: 0.9153 - val_loss: 0.2534
Epoch 257/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9283 - loss: 0.1746 - val_accuracy: 0.9216 - val_loss: 0.2318
Epoch 258/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9363 - loss: 0.1693 - val_accuracy: 0.9099 - val_loss: 0.2612
Epoch 259/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9357 - loss: 0.1651 - val_accuracy: 0.9253 - val_loss: 0.2180
Epoch 260/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9350 - loss: 0.1666 - val_accuracy: 0.9203 - val_loss: 0.2696
Epoch 261/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9419 - loss: 0.1556 - val_accuracy: 0.9044 - val_loss: 0.2886
Epoch 262/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9346 - loss: 0.1741 - val_accuracy: 0.9171 - val_loss: 0.2508
Epoch 263/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9365 - loss: 0.1731 - val_accuracy: 0.9153 - val_loss: 0.2472
Epoch 264/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9346 - loss: 0.1592 - val_accuracy: 0.9176 - val_loss: 0.2592
Epoch 265/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9347 - loss: 0.1740 - val_accuracy: 0.9158 - val_loss: 0.2556
Epoch 266/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9401 - loss: 0.1577 - val_accuracy: 0.9207 - val_loss: 0.2430
Epoch 267/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9363 - loss: 0.1682 - val_accuracy: 0.9194 - val_loss: 0.2425
Epoch 268/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9290 - loss: 0.1778 - val_accuracy: 0.9262 - val_loss: 0.2285
Epoch 269/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9385 - loss: 0.1539 - val_accuracy: 0.9162 - val_loss: 0.2626
Epoch 270/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9388 - loss: 0.1591 - val_accuracy: 0.9194 - val_loss: 0.2361
Epoch 271/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9403 - loss: 0.1560 - val_accuracy: 0.9185 - val_loss: 0.2546
Epoch 272/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9406 - loss: 0.1481 - val_accuracy: 0.9126 - val_loss: 0.2480
Epoch 273/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9404 - loss: 0.1690 - val_accuracy: 0.9126 - val_loss: 0.2555
Epoch 274/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9382 - loss: 0.1550 - val_accuracy: 0.9022 - val_loss: 0.2786
Epoch 275/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9346 - loss: 0.1719 - val_accuracy: 0.9198 - val_loss: 0.2494
Epoch 276/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9327 - loss: 0.1682 - val_accuracy: 0.9144 - val_loss: 0.2585
Epoch 277/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9451 - loss: 0.1446 - val_accuracy: 0.9198 - val_loss: 0.2546
Epoch 278/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9487 - loss: 0.1404 - val_accuracy: 0.9130 - val_loss: 0.2721
Epoch 279/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9438 - loss: 0.1490 - val_accuracy: 0.9189 - val_loss: 0.2634
Epoch 280/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9400 - loss: 0.1525 - val_accuracy: 0.9244 - val_loss: 0.2428
Epoch 281/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9431 - loss: 0.1517 - val_accuracy: 0.9103 - val_loss: 0.2626
Epoch 282/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9397 - loss: 0.1573 - val_accuracy: 0.9212 - val_loss: 0.2508
Epoch 283/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9434 - loss: 0.1511 - val_accuracy: 0.9149 - val_loss: 0.2637
Epoch 284/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9380 - loss: 0.1603 - val_accuracy: 0.9248 - val_loss: 0.2275
Epoch 285/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9432 - loss: 0.1554 - val_accuracy: 0.9099 - val_loss: 0.2815
Epoch 286/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9409 - loss: 0.1524 - val_accuracy: 0.9271 - val_loss: 0.2278
Epoch 287/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9415 - loss: 0.1505 - val_accuracy: 0.9244 - val_loss: 0.2406
Epoch 288/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9436 - loss: 0.1468 - val_accuracy: 0.9171 - val_loss: 0.2575
Epoch 289/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9373 - loss: 0.1615 - val_accuracy: 0.9158 - val_loss: 0.2519
Epoch 290/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9349 - loss: 0.1601 - val_accuracy: 0.9194 - val_loss: 0.2367
Epoch 291/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9397 - loss: 0.1544 - val_accuracy: 0.9207 - val_loss: 0.2451
Epoch 292/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9470 - loss: 0.1405 - val_accuracy: 0.9212 - val_loss: 0.2307
Epoch 293/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9402 - loss: 0.1598 - val_accuracy: 0.9198 - val_loss: 0.2316
Epoch 294/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9443 - loss: 0.1445 - val_accuracy: 0.9198 - val_loss: 0.2739
Epoch 295/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9396 - loss: 0.1499 - val_accuracy: 0.9185 - val_loss: 0.2398
Epoch 296/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9346 - loss: 0.1584 - val_accuracy: 0.9257 - val_loss: 0.2471
Epoch 297/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9458 - loss: 0.1455 - val_accuracy: 0.9226 - val_loss: 0.2423
Epoch 298/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9456 - loss: 0.1441 - val_accuracy: 0.9248 - val_loss: 0.2520
Epoch 299/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9423 - loss: 0.1537 - val_accuracy: 0.9198 - val_loss: 0.2549
Epoch 300/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9390 - loss: 0.1586 - val_accuracy: 0.9275 - val_loss: 0.2269
Epoch 301/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9355 - loss: 0.1606 - val_accuracy: 0.9280 - val_loss: 0.2185
Epoch 302/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9429 - loss: 0.1448 - val_accuracy: 0.9284 - val_loss: 0.2337
Epoch 303/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9446 - loss: 0.1449 - val_accuracy: 0.9230 - val_loss: 0.2182
Epoch 304/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9429 - loss: 0.1501 - val_accuracy: 0.9366 - val_loss: 0.2012
Epoch 305/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9516 - loss: 0.1288 - val_accuracy: 0.9203 - val_loss: 0.2535
Epoch 306/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9463 - loss: 0.1428 - val_accuracy: 0.9275 - val_loss: 0.2294
Epoch 307/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9439 - loss: 0.1510 - val_accuracy: 0.9171 - val_loss: 0.2594
Epoch 308/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9427 - loss: 0.1433 - val_accuracy: 0.9207 - val_loss: 0.2438
Epoch 309/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9500 - loss: 0.1411 - val_accuracy: 0.9235 - val_loss: 0.2253
Epoch 310/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9482 - loss: 0.1332 - val_accuracy: 0.9099 - val_loss: 0.2599
Epoch 311/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9396 - loss: 0.1600 - val_accuracy: 0.9176 - val_loss: 0.2644
Epoch 312/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9512 - loss: 0.1360 - val_accuracy: 0.9244 - val_loss: 0.2188
Epoch 313/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9528 - loss: 0.1316 - val_accuracy: 0.9321 - val_loss: 0.2084
Epoch 314/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9443 - loss: 0.1463 - val_accuracy: 0.9216 - val_loss: 0.2342
Epoch 315/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9472 - loss: 0.1383 - val_accuracy: 0.9348 - val_loss: 0.2212
Epoch 316/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9428 - loss: 0.1488 - val_accuracy: 0.9235 - val_loss: 0.2669
Epoch 317/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9462 - loss: 0.1364 - val_accuracy: 0.9343 - val_loss: 0.2065
Epoch 318/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9480 - loss: 0.1346 - val_accuracy: 0.9244 - val_loss: 0.2226
Epoch 319/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9478 - loss: 0.1349 - val_accuracy: 0.9235 - val_loss: 0.2305
Epoch 320/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9416 - loss: 0.1504 - val_accuracy: 0.9289 - val_loss: 0.2268
Epoch 321/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9466 - loss: 0.1419 - val_accuracy: 0.9198 - val_loss: 0.2333
Epoch 322/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9478 - loss: 0.1320 - val_accuracy: 0.9235 - val_loss: 0.2293
Epoch 323/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9418 - loss: 0.1489 - val_accuracy: 0.9198 - val_loss: 0.2272
Epoch 324/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9402 - loss: 0.1518 - val_accuracy: 0.9293 - val_loss: 0.2267
Epoch 325/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9443 - loss: 0.1384 - val_accuracy: 0.9271 - val_loss: 0.2219
Epoch 326/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9512 - loss: 0.1242 - val_accuracy: 0.9321 - val_loss: 0.2219
Epoch 327/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9506 - loss: 0.1358 - val_accuracy: 0.9257 - val_loss: 0.2260
Epoch 328/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9466 - loss: 0.1423 - val_accuracy: 0.9226 - val_loss: 0.2449
Epoch 329/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9459 - loss: 0.1473 - val_accuracy: 0.9289 - val_loss: 0.2400
Epoch 330/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9474 - loss: 0.1361 - val_accuracy: 0.9321 - val_loss: 0.2215
Epoch 331/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9479 - loss: 0.1361 - val_accuracy: 0.9289 - val_loss: 0.2182
Epoch 332/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9495 - loss: 0.1284 - val_accuracy: 0.9293 - val_loss: 0.2285
Epoch 333/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9467 - loss: 0.1433 - val_accuracy: 0.9189 - val_loss: 0.2651
Epoch 334/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9475 - loss: 0.1381 - val_accuracy: 0.9307 - val_loss: 0.2235
Epoch 335/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9507 - loss: 0.1315 - val_accuracy: 0.9253 - val_loss: 0.2369
Epoch 336/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9493 - loss: 0.1355 - val_accuracy: 0.9275 - val_loss: 0.2318
Epoch 337/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9467 - loss: 0.1370 - val_accuracy: 0.9271 - val_loss: 0.2284
Epoch 338/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9524 - loss: 0.1237 - val_accuracy: 0.9289 - val_loss: 0.2388
Epoch 339/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9525 - loss: 0.1230 - val_accuracy: 0.9221 - val_loss: 0.2327
Epoch 340/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9505 - loss: 0.1471 - val_accuracy: 0.9303 - val_loss: 0.2314
Epoch 341/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9522 - loss: 0.1290 - val_accuracy: 0.9221 - val_loss: 0.2452
Epoch 342/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9535 - loss: 0.1239 - val_accuracy: 0.9248 - val_loss: 0.2446
Epoch 343/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9455 - loss: 0.1432 - val_accuracy: 0.9171 - val_loss: 0.2421
Epoch 344/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9470 - loss: 0.1325 - val_accuracy: 0.9189 - val_loss: 0.2561
Epoch 345/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9439 - loss: 0.1439 - val_accuracy: 0.9180 - val_loss: 0.2428
Epoch 346/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9506 - loss: 0.1368 - val_accuracy: 0.9289 - val_loss: 0.2273
Epoch 347/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9505 - loss: 0.1275 - val_accuracy: 0.9194 - val_loss: 0.2354
Epoch 348/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9565 - loss: 0.1184 - val_accuracy: 0.9275 - val_loss: 0.2354
Epoch 349/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9551 - loss: 0.1185 - val_accuracy: 0.9198 - val_loss: 0.2583
Epoch 350/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9543 - loss: 0.1255 - val_accuracy: 0.9235 - val_loss: 0.2255
Epoch 351/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9515 - loss: 0.1297 - val_accuracy: 0.9221 - val_loss: 0.2403
Epoch 352/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9432 - loss: 0.1520 - val_accuracy: 0.9253 - val_loss: 0.2137
Epoch 353/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9428 - loss: 0.1379 - val_accuracy: 0.9271 - val_loss: 0.2327
Epoch 354/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9550 - loss: 0.1232 - val_accuracy: 0.9275 - val_loss: 0.2228
Epoch 355/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9474 - loss: 0.1291 - val_accuracy: 0.9325 - val_loss: 0.2243
Epoch 356/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9555 - loss: 0.1279 - val_accuracy: 0.9298 - val_loss: 0.2300
Epoch 357/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9558 - loss: 0.1180 - val_accuracy: 0.9284 - val_loss: 0.2259
Epoch 358/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9558 - loss: 0.1177 - val_accuracy: 0.9303 - val_loss: 0.2254
Epoch 359/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9518 - loss: 0.1320 - val_accuracy: 0.9284 - val_loss: 0.2318
Epoch 360/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9523 - loss: 0.1250 - val_accuracy: 0.9239 - val_loss: 0.2523
Epoch 361/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9559 - loss: 0.1199 - val_accuracy: 0.9244 - val_loss: 0.2494
Epoch 362/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9549 - loss: 0.1250 - val_accuracy: 0.9275 - val_loss: 0.2260
Epoch 363/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9493 - loss: 0.1404 - val_accuracy: 0.9244 - val_loss: 0.2378
Epoch 364/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9506 - loss: 0.1211 - val_accuracy: 0.9248 - val_loss: 0.2520
Epoch 365/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9516 - loss: 0.1345 - val_accuracy: 0.9312 - val_loss: 0.2304
Epoch 366/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9496 - loss: 0.1313 - val_accuracy: 0.9312 - val_loss: 0.2167
Epoch 367/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9567 - loss: 0.1158 - val_accuracy: 0.9271 - val_loss: 0.2332
Epoch 368/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9565 - loss: 0.1135 - val_accuracy: 0.9266 - val_loss: 0.2416
Epoch 369/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9486 - loss: 0.1327 - val_accuracy: 0.9230 - val_loss: 0.2563
Epoch 370/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9556 - loss: 0.1222 - val_accuracy: 0.9316 - val_loss: 0.2197
Epoch 371/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9537 - loss: 0.1220 - val_accuracy: 0.9307 - val_loss: 0.2189
Epoch 372/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9549 - loss: 0.1189 - val_accuracy: 0.9343 - val_loss: 0.2110
Epoch 373/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9565 - loss: 0.1160 - val_accuracy: 0.9248 - val_loss: 0.2633
Epoch 374/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9541 - loss: 0.1248 - val_accuracy: 0.9303 - val_loss: 0.2500
Epoch 375/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9454 - loss: 0.1471 - val_accuracy: 0.9298 - val_loss: 0.2229
Epoch 376/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9584 - loss: 0.1096 - val_accuracy: 0.9275 - val_loss: 0.2222
Epoch 377/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9564 - loss: 0.1122 - val_accuracy: 0.9289 - val_loss: 0.2304
Epoch 378/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9503 - loss: 0.1274 - val_accuracy: 0.9284 - val_loss: 0.2368
Epoch 379/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9485 - loss: 0.1346 - val_accuracy: 0.9212 - val_loss: 0.2529
Epoch 380/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9516 - loss: 0.1252 - val_accuracy: 0.9167 - val_loss: 0.2680
Epoch 381/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9576 - loss: 0.1199 - val_accuracy: 0.9090 - val_loss: 0.2766
Epoch 382/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9491 - loss: 0.1388 - val_accuracy: 0.9275 - val_loss: 0.2292
Epoch 383/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9526 - loss: 0.1242 - val_accuracy: 0.9235 - val_loss: 0.2423
Epoch 384/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9449 - loss: 0.1382 - val_accuracy: 0.9321 - val_loss: 0.2395
Epoch 385/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9520 - loss: 0.1433 - val_accuracy: 0.9330 - val_loss: 0.2314
Epoch 386/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9531 - loss: 0.1237 - val_accuracy: 0.9298 - val_loss: 0.2125
Epoch 387/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9551 - loss: 0.1207 - val_accuracy: 0.9370 - val_loss: 0.2094
Epoch 388/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9550 - loss: 0.1193 - val_accuracy: 0.9230 - val_loss: 0.2429
Epoch 389/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9571 - loss: 0.1174 - val_accuracy: 0.9230 - val_loss: 0.2319
Epoch 390/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9529 - loss: 0.1252 - val_accuracy: 0.9244 - val_loss: 0.2328
Epoch 391/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9601 - loss: 0.1082 - val_accuracy: 0.9262 - val_loss: 0.2303
Epoch 392/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9515 - loss: 0.1309 - val_accuracy: 0.9244 - val_loss: 0.2220
Epoch 393/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9589 - loss: 0.1136 - val_accuracy: 0.9266 - val_loss: 0.2393
Epoch 394/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9590 - loss: 0.1133 - val_accuracy: 0.9284 - val_loss: 0.2120
Epoch 395/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9556 - loss: 0.1184 - val_accuracy: 0.9280 - val_loss: 0.2259
Epoch 396/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9526 - loss: 0.1298 - val_accuracy: 0.9275 - val_loss: 0.2387
Epoch 397/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9554 - loss: 0.1168 - val_accuracy: 0.9289 - val_loss: 0.2316
Epoch 398/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9599 - loss: 0.1152 - val_accuracy: 0.9266 - val_loss: 0.2353
Epoch 399/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9565 - loss: 0.1160 - val_accuracy: 0.9280 - val_loss: 0.2322
Epoch 400/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9510 - loss: 0.1260 - val_accuracy: 0.9312 - val_loss: 0.2131
Epoch 401/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9532 - loss: 0.1283 - val_accuracy: 0.9289 - val_loss: 0.2377
Epoch 402/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9560 - loss: 0.1136 - val_accuracy: 0.9361 - val_loss: 0.2153
Epoch 403/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9574 - loss: 0.1140 - val_accuracy: 0.9284 - val_loss: 0.2295
Epoch 404/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9598 - loss: 0.1096 - val_accuracy: 0.9244 - val_loss: 0.2543
Epoch 405/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9502 - loss: 0.1278 - val_accuracy: 0.9330 - val_loss: 0.2189
Epoch 406/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9527 - loss: 0.1193 - val_accuracy: 0.9244 - val_loss: 0.2522
Epoch 407/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9476 - loss: 0.1390 - val_accuracy: 0.9235 - val_loss: 0.2548
Epoch 408/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9472 - loss: 0.1366 - val_accuracy: 0.9198 - val_loss: 0.2782
Epoch 409/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9528 - loss: 0.1233 - val_accuracy: 0.9289 - val_loss: 0.2511
Epoch 410/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9537 - loss: 0.1194 - val_accuracy: 0.9334 - val_loss: 0.2103
Epoch 411/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9555 - loss: 0.1196 - val_accuracy: 0.9325 - val_loss: 0.2255
Epoch 412/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9608 - loss: 0.1038 - val_accuracy: 0.9253 - val_loss: 0.2500
Epoch 413/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9569 - loss: 0.1162 - val_accuracy: 0.9257 - val_loss: 0.2241
Epoch 414/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9552 - loss: 0.1178 - val_accuracy: 0.9226 - val_loss: 0.2357
Epoch 415/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9606 - loss: 0.1152 - val_accuracy: 0.9198 - val_loss: 0.2394
Epoch 416/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9571 - loss: 0.1112 - val_accuracy: 0.9280 - val_loss: 0.2572
Epoch 417/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9535 - loss: 0.1199 - val_accuracy: 0.9307 - val_loss: 0.2434
Epoch 418/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9606 - loss: 0.1098 - val_accuracy: 0.9325 - val_loss: 0.2182
Epoch 419/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9560 - loss: 0.1195 - val_accuracy: 0.9330 - val_loss: 0.2106
Epoch 420/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9585 - loss: 0.1057 - val_accuracy: 0.9303 - val_loss: 0.2188
Epoch 421/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9545 - loss: 0.1205 - val_accuracy: 0.9316 - val_loss: 0.2371
Epoch 422/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9598 - loss: 0.1114 - val_accuracy: 0.9212 - val_loss: 0.2452
Epoch 423/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9579 - loss: 0.1119 - val_accuracy: 0.9207 - val_loss: 0.2810
Epoch 424/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9539 - loss: 0.1234 - val_accuracy: 0.9266 - val_loss: 0.2558
Epoch 425/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9686 - loss: 0.0992 - val_accuracy: 0.9271 - val_loss: 0.2385
Epoch 426/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9573 - loss: 0.1174 - val_accuracy: 0.9203 - val_loss: 0.2897
Epoch 427/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9549 - loss: 0.1191 - val_accuracy: 0.9266 - val_loss: 0.2320
Epoch 428/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9610 - loss: 0.1087 - val_accuracy: 0.9185 - val_loss: 0.2632
Epoch 429/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9582 - loss: 0.1112 - val_accuracy: 0.9226 - val_loss: 0.2628
Epoch 430/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9589 - loss: 0.1117 - val_accuracy: 0.9266 - val_loss: 0.2406
Epoch 431/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9639 - loss: 0.0976 - val_accuracy: 0.9271 - val_loss: 0.2313
Epoch 432/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9666 - loss: 0.0970 - val_accuracy: 0.9189 - val_loss: 0.2703
Epoch 433/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9522 - loss: 0.1273 - val_accuracy: 0.9303 - val_loss: 0.2378
Epoch 434/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9620 - loss: 0.1061 - val_accuracy: 0.9375 - val_loss: 0.2187
Epoch 435/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9538 - loss: 0.1292 - val_accuracy: 0.9312 - val_loss: 0.2341
Epoch 436/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9566 - loss: 0.1154 - val_accuracy: 0.9339 - val_loss: 0.2204
Epoch 437/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9528 - loss: 0.1175 - val_accuracy: 0.9312 - val_loss: 0.2363
Epoch 438/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9598 - loss: 0.1089 - val_accuracy: 0.9307 - val_loss: 0.2276
Epoch 439/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9640 - loss: 0.0968 - val_accuracy: 0.9253 - val_loss: 0.2705
Epoch 440/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9555 - loss: 0.1133 - val_accuracy: 0.9239 - val_loss: 0.2454
Epoch 441/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9582 - loss: 0.1084 - val_accuracy: 0.9248 - val_loss: 0.2460
Epoch 442/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9572 - loss: 0.1161 - val_accuracy: 0.9099 - val_loss: 0.3091
Epoch 443/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9547 - loss: 0.1204 - val_accuracy: 0.9271 - val_loss: 0.2396
Epoch 444/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9576 - loss: 0.1185 - val_accuracy: 0.9189 - val_loss: 0.3063
Epoch 445/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9534 - loss: 0.1276 - val_accuracy: 0.9257 - val_loss: 0.2414
Epoch 446/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9637 - loss: 0.0987 - val_accuracy: 0.9248 - val_loss: 0.2431
Epoch 447/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9633 - loss: 0.1003 - val_accuracy: 0.9330 - val_loss: 0.2330
Epoch 448/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9571 - loss: 0.1089 - val_accuracy: 0.9230 - val_loss: 0.2776
Epoch 449/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9637 - loss: 0.1016 - val_accuracy: 0.9262 - val_loss: 0.2658
Epoch 450/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9612 - loss: 0.1022 - val_accuracy: 0.9298 - val_loss: 0.2459
Epoch 451/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9623 - loss: 0.0996 - val_accuracy: 0.9266 - val_loss: 0.2673
Epoch 452/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9582 - loss: 0.1085 - val_accuracy: 0.9253 - val_loss: 0.2277
Epoch 453/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9581 - loss: 0.1063 - val_accuracy: 0.9280 - val_loss: 0.2441
Epoch 454/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9648 - loss: 0.0984 - val_accuracy: 0.9275 - val_loss: 0.2540
Epoch 455/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9610 - loss: 0.1043 - val_accuracy: 0.9348 - val_loss: 0.2182
Epoch 456/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9568 - loss: 0.1244 - val_accuracy: 0.9266 - val_loss: 0.2430
Epoch 457/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9637 - loss: 0.0983 - val_accuracy: 0.9275 - val_loss: 0.2384
Epoch 458/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9611 - loss: 0.1046 - val_accuracy: 0.9280 - val_loss: 0.2689
Epoch 459/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9627 - loss: 0.1073 - val_accuracy: 0.9266 - val_loss: 0.2505
Epoch 460/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9650 - loss: 0.0925 - val_accuracy: 0.9207 - val_loss: 0.2726
Epoch 461/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9559 - loss: 0.1150 - val_accuracy: 0.9343 - val_loss: 0.2237
Epoch 462/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9570 - loss: 0.1174 - val_accuracy: 0.9257 - val_loss: 0.2292
Epoch 463/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9595 - loss: 0.1075 - val_accuracy: 0.9307 - val_loss: 0.2267
Epoch 464/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9653 - loss: 0.0924 - val_accuracy: 0.9339 - val_loss: 0.2349
Epoch 465/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9612 - loss: 0.1061 - val_accuracy: 0.9262 - val_loss: 0.2563
Epoch 466/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9568 - loss: 0.1127 - val_accuracy: 0.9325 - val_loss: 0.2395
Epoch 467/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9624 - loss: 0.1039 - val_accuracy: 0.9239 - val_loss: 0.2784
Epoch 468/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9594 - loss: 0.1061 - val_accuracy: 0.9298 - val_loss: 0.2348
Epoch 469/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9627 - loss: 0.1073 - val_accuracy: 0.9239 - val_loss: 0.2640
Epoch 470/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9604 - loss: 0.1002 - val_accuracy: 0.9307 - val_loss: 0.2590
Epoch 471/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9623 - loss: 0.1042 - val_accuracy: 0.9130 - val_loss: 0.3128
Epoch 472/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9607 - loss: 0.1088 - val_accuracy: 0.9239 - val_loss: 0.2425
Epoch 473/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9623 - loss: 0.0977 - val_accuracy: 0.9325 - val_loss: 0.2400
Epoch 474/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9594 - loss: 0.1055 - val_accuracy: 0.9248 - val_loss: 0.2733
Epoch 475/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9630 - loss: 0.1033 - val_accuracy: 0.9257 - val_loss: 0.2900
Epoch 476/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9593 - loss: 0.1100 - val_accuracy: 0.9289 - val_loss: 0.2374
Epoch 477/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9615 - loss: 0.1002 - val_accuracy: 0.9266 - val_loss: 0.2489
Epoch 478/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9612 - loss: 0.1101 - val_accuracy: 0.9180 - val_loss: 0.2742
Epoch 479/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9598 - loss: 0.1092 - val_accuracy: 0.9235 - val_loss: 0.2588
Epoch 480/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9636 - loss: 0.0973 - val_accuracy: 0.9289 - val_loss: 0.2579
Epoch 481/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9627 - loss: 0.1041 - val_accuracy: 0.9244 - val_loss: 0.2531
Epoch 482/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9619 - loss: 0.1029 - val_accuracy: 0.9253 - val_loss: 0.2486
Epoch 483/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9654 - loss: 0.0970 - val_accuracy: 0.9303 - val_loss: 0.2311
Epoch 484/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9602 - loss: 0.0985 - val_accuracy: 0.9284 - val_loss: 0.2398
Epoch 485/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9628 - loss: 0.1070 - val_accuracy: 0.9262 - val_loss: 0.2482
Epoch 486/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9588 - loss: 0.1091 - val_accuracy: 0.9298 - val_loss: 0.2375
Epoch 487/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9604 - loss: 0.1132 - val_accuracy: 0.9284 - val_loss: 0.2504
Epoch 488/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9633 - loss: 0.1029 - val_accuracy: 0.9253 - val_loss: 0.2590
Epoch 489/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9647 - loss: 0.0996 - val_accuracy: 0.9244 - val_loss: 0.2612
Epoch 490/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9687 - loss: 0.0917 - val_accuracy: 0.9230 - val_loss: 0.2707
Epoch 491/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9665 - loss: 0.0948 - val_accuracy: 0.9321 - val_loss: 0.2324
Epoch 492/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9641 - loss: 0.0912 - val_accuracy: 0.9321 - val_loss: 0.2493
Epoch 493/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9589 - loss: 0.1035 - val_accuracy: 0.9321 - val_loss: 0.2310
Epoch 494/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9660 - loss: 0.0982 - val_accuracy: 0.9271 - val_loss: 0.2571
Epoch 495/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9638 - loss: 0.0964 - val_accuracy: 0.9221 - val_loss: 0.2554
Epoch 496/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.9617 - loss: 0.1053 - val_accuracy: 0.9239 - val_loss: 0.2471
Epoch 497/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9624 - loss: 0.1012 - val_accuracy: 0.9293 - val_loss: 0.2669
Epoch 498/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9609 - loss: 0.1075 - val_accuracy: 0.9325 - val_loss: 0.2352
Epoch 499/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9562 - loss: 0.1125 - val_accuracy: 0.9235 - val_loss: 0.2679
Epoch 500/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9583 - loss: 0.1176 - val_accuracy: 0.9330 - val_loss: 0.2341
Epoch 501/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9660 - loss: 0.0948 - val_accuracy: 0.9303 - val_loss: 0.2449
Epoch 502/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9653 - loss: 0.0959 - val_accuracy: 0.9384 - val_loss: 0.2125
Epoch 503/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9636 - loss: 0.0936 - val_accuracy: 0.9262 - val_loss: 0.2664
Epoch 504/1000
69/69 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9629 - loss: 0.1017 - val_accuracy: 0.9162 - val_loss: 0.2777
Epoch 504: early stopping
Restoring model weights from the end of the best epoch: 304.
In [20]:
# Neural network architecture — prints the layer table (types, output shapes, parameter counts)

model.summary()
Model: "sequential"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┓
┃ Layer (type)                         ┃ Output Shape                ┃         Param # ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━┩
│ dense (Dense)                        │ (None, 128)                 │          23,936 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dropout (Dropout)                    │ (None, 128)                 │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dense_1 (Dense)                      │ (None, 512)                 │          66,048 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dropout_1 (Dropout)                  │ (None, 512)                 │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dense_2 (Dense)                      │ (None, 256)                 │         131,328 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dropout_2 (Dropout)                  │ (None, 256)                 │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dense_3 (Dense)                      │ (None, 64)                  │          16,448 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dropout_3 (Dropout)                  │ (None, 64)                  │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dense_4 (Dense)                      │ (None, 5)                   │             325 │
└──────────────────────────────────────┴─────────────────────────────┴─────────────────┘
 Total params: 714,257 (2.72 MB)
 Trainable params: 238,085 (930.02 KB)
 Non-trainable params: 0 (0.00 B)
 Optimizer params: 476,172 (1.82 MB)
In [21]:
# Evaluate the neural network with four accuracy metrics.
# Per-class breakdown first, then weighted aggregate scores as percentages.

print(classification_report(y_true, y_pred_classes))

# (label, scorer) pairs; labels are padded so the colons line up in the output.
weighted_metrics = [
    (" Accuracy", lambda t, p: accuracy_score(t, p)),
    ("Precision", lambda t, p: precision_score(t, p, average='weighted')),
    (" F1-Score", lambda t, p: f1_score(t, p, average='weighted')),
    ("   Recall", lambda t, p: recall_score(t, p, average='weighted')),
]
for label, scorer in weighted_metrics:
    print(f"{label}: {round(scorer(y_true, y_pred_classes) * 100, 3)} %")
              precision    recall  f1-score   support

           0       0.96      0.96      0.96       528
           1       0.94      0.95      0.95       561
           2       0.90      0.91      0.91       538
           3       0.86      0.89      0.87       582
           4       0.93      0.88      0.91       550

    accuracy                           0.92      2759
   macro avg       0.92      0.92      0.92      2759
weighted avg       0.92      0.92      0.92      2759

 Accuracy: 91.7 %
Precision: 91.758 %
 F1-Score: 91.712 %
   Recall: 91.7 %
In [22]:
# Training history: accuracy and loss curves side by side.
# NOTE(review): assumes `trained_model` is the History object returned by model.fit() — confirm in the training cell.
fig, (ax_acc, ax_loss) = plt.subplots(1, 2, figsize=(12, 5))

# Accuracy curves
ax_acc.plot(trained_model.history['accuracy'], label='train_accuracy')
ax_acc.plot(trained_model.history['val_accuracy'], label='val_accuracy')
ax_acc.set_title('Accuracy over Epochs')
ax_acc.set_xlabel('Epochs')
ax_acc.set_ylabel('Accuracy')
ax_acc.legend()

# Loss curves
ax_loss.plot(trained_model.history['loss'], label='train_loss')
ax_loss.plot(trained_model.history['val_loss'], label='val_loss')
ax_loss.set_title('Loss over Epochs')
ax_loss.set_xlabel('Epochs')
ax_loss.set_ylabel('Loss')
ax_loss.legend()

fig.tight_layout()
# fig.savefig('neural_network_training_history.png', dpi=300, bbox_inches='tight')
plt.show()

# plot_model(model, to_file='model_architecture.png', show_shapes=True, show_layer_names=True)
No description has been provided for this image
In [23]:
# Confusion matrix for the neural network's test-set predictions.

cm = confusion_matrix(y_true, y_pred_classes)
cm_display = ConfusionMatrixDisplay(confusion_matrix=cm, display_labels=np.arange(5))
cm_display.plot(cmap=plt.cm.Blues)
plt.title('Матрица ошибок для Нейронной сети')
# plt.savefig('neural_network_confusion_matrix.png', dpi=300, bbox_inches='tight')
plt.show()
No description has been provided for this image